tICA notes:
Also equivalent to the generalized eigenvalue problem $$ C^{(\tau)} \vec{v} = \lambda \Sigma \vec{v}$$ where $C^{(\tau)}$ is the symmetrized time-lagged correlation matrix and $\Sigma$ is the instantaneous covariance matrix (both estimated below).
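One way to see this (a standard variational sketch): tICA seeks the projection $\vec{v}$ with maximal autocorrelation at lag $\tau$ subject to unit variance, and setting the gradient of the Lagrangian to zero recovers the generalized eigenproblem:
$$\max_{\vec{v}} \; \vec{v}^\top C^{(\tau)} \vec{v} \quad \text{s.t.} \quad \vec{v}^\top \Sigma \vec{v} = 1 \quad\Longrightarrow\quad C^{(\tau)} \vec{v} = \lambda \Sigma \vec{v},$$
so the eigenvalue $\lambda$ is the autocorrelation of the corresponding component.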
In [6]:
import numpy as np
import numpy.random as npr
import matplotlib.pyplot as pl
from sklearn.decomposition import PCA
from sklearn.cross_decomposition import CCA
from scipy import linalg
In [7]:
def time_lag_corr_cov(X, tau=1):
    """Estimate the symmetrized time-lagged correlation matrix C^(tau) and
    the covariance matrix Sigma from a trajectory X of shape (n_frames, dim)."""
    # center the data using the mean over both the leading and lagged windows
    mu = (X[:-tau].mean(0) + X[tau:].mean(0)) / 2
    X_ = X - mu
    M = len(X) - tau
    dim = X.shape[1]
    corr = np.zeros((dim, dim))
    cov = np.zeros((dim, dim))
    for i in range(M):
        # symmetrize the lagged outer products so corr is a symmetric matrix
        corr += np.outer(X_[i], X_[i + tau]) + np.outer(X_[i + tau], X_[i])
        cov += np.outer(X_[i], X_[i]) + np.outer(X_[i + tau], X_[i + tau])
    return corr / (2.0 * M), cov / (2.0 * M)
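A quick check of the estimator (toy data: a slowly decorrelating random walk paired with white noise; the seed and lag are arbitrary choices). Feeding both estimates to scipy's generalized symmetric eigensolver solves the tICA eigenproblem above, and the top eigenvector should load mostly on the slow coordinate:
In [ ]:
npr.seed(0)
slow = np.cumsum(npr.randn(2000)) * 0.01   # random walk: decorrelates slowly
fast = npr.randn(2000)                     # white noise: decorrelates immediately
X = np.column_stack([slow, fast])

C_tau, Sigma = time_lag_corr_cov(X, tau=10)
# eigh(a, b) solves the generalized problem a v = lambda b v (ascending order)
evals, evecs = linalg.eigh(C_tau, Sigma)
print(evals[-1], evecs[:, -1])             # slowest tICA mode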
In [8]:
def time_lagged_K(kernel_matrix, tau=1):
    """Build the augmented 2M x 2M kernel matrix over the concatenated points
    (x_1, ..., x_M, x_{1+tau}, ..., x_{M+tau}).

    To-do: accept either a kernel function or a precomputed kernel matrix.
    For now, just accept a precomputed kernel matrix."""
    M = len(kernel_matrix) - tau
    K = np.zeros((2 * M, 2 * M))
    # the four blocks below are equivalent to evaluating
    #   K[i, j]     = kernel(X[i],     X[j])
    #   K[i, j+M]   = kernel(X[i],     X[j+tau])
    #   K[i+M, j]   = kernel(X[i+tau], X[j])
    #   K[i+M, j+M] = kernel(X[i+tau], X[j+tau])
    K[:M, :M] = kernel_matrix[:M, :M]
    K[:M, M:] = kernel_matrix[:M, tau:]
    K[M:, :M] = kernel_matrix[tau:, :M]
    K[M:, M:] = kernel_matrix[tau:, tau:]
    return K
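For instance (a hedged sketch; the Gaussian kernel via sklearn's rbf_kernel and the gamma value are arbitrary choices):
In [ ]:
from sklearn.metrics.pairwise import rbf_kernel

X = npr.randn(200, 2)                  # made-up data, shape (n_frames, dim)
Kxx = rbf_kernel(X, gamma=1.0)         # precomputed (200 x 200) kernel matrix
K_tl = time_lagged_K(Kxx, tau=10)      # augmented (380 x 380) time-lagged kernel
print(Kxx.shape, K_tl.shape)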
In [9]:
def kpca(kernel_matrix, n_components=3, c=None):
    """Kernel PCA on a precomputed (ideally centered) kernel matrix.
    Computes the top n_components+1 eigenpairs, plots the eigenvalue spectrum
    and pairwise scatter plots of the projected components. The scatter plots
    use components 1-3, so n_components must be at least 3."""
    M = len(kernel_matrix) - 1
    # subset_by_index selects the largest n_components+1 eigenpairs
    # (eigh returns them in ascending order)
    evals, evecs = linalg.eigh(kernel_matrix, subset_by_index=(M - n_components, M))
    order = np.argsort(evals)[::-1]           # re-sort into descending order
    alphas = evecs[:, order]
    lambdas = evals[order]
    pl.plot(range(1, len(lambdas) + 1), lambdas)   # eigenvalue spectrum
    # clip tiny negative eigenvalues from round-off before taking the sqrt
    sqrt_lambdas = np.diag(np.sqrt(np.clip(lambdas, 0, None)))
    if c is None:
        c = range(M + 1)
    projection = alphas.dot(sqrt_lambdas)
    # pairwise scatter plots of components 1-3 (component 0 is often nearly
    # constant for an uncentered kernel, so it is skipped)
    pl.figure()
    pl.scatter(projection[:, 1], projection[:, 2], c=c[:M + 1], linewidths=0, alpha=0.5)
    pl.figure()
    pl.scatter(projection[:, 2], projection[:, 3], c=c[:M + 1], linewidths=0, alpha=0.5)
    pl.figure()
    pl.scatter(projection[:, 1], projection[:, 3], c=c[:M + 1], linewidths=0, alpha=0.5)
    return lambdas, alphas
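Putting the pieces together, a hedged end-to-end sketch (toy data: noisy motion around a circle; the bandwidth gamma and lag are arbitrary, and rbf_kernel is imported in the cell above):
In [ ]:
npr.seed(1)
t = np.linspace(0, 4 * np.pi, 500)
X = np.column_stack([np.cos(t), np.sin(t)]) + 0.1 * npr.randn(500, 2)

Kxx = rbf_kernel(X, gamma=2.0)          # precomputed kernel over the trajectory
K_tl = time_lagged_K(Kxx, tau=5)        # augmented time-lagged kernel (990 x 990)
# color by time; the augmented matrix stacks the leading and lagged windows
lambdas, alphas = kpca(K_tl, n_components=3, c=np.tile(t, 2))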
In [ ]:
pl.show()